No transformation is required.
## [1] 0.1919047
## [1] -0.2269461
# I don't like such strong transformations, so I will use lambda = 0 instead.
# Apply a log transform (Box-Cox with lambda = 0) to enplanements and plot it,
# one pipeline step per line.
enplanements %>%
  BoxCox(lambda = 0) %>%
  autoplot()
## [1] 0.1313326
# Load the retail data and build a monthly series starting April 1982.
retaildata <- read.csv("retail.csv")
mytimeseries <- ts(retaildata[, 4], frequency = 12, start = c(1982, 4))

# Estimate the Box-Cox parameter; the outer parentheses print the value.
(lambda <- BoxCox.lambda(mytimeseries))
## [1] -0.3912717
Now try it on the training/test split.
# Hold out observations from 2011 onward as the test set.
train <- window(mytimeseries, end = c(2010, 12))
test <- window(mytimeseries, start = 2011)

# STL decomposition + ETS forecast on the Box-Cox-transformed training data.
fc <- stlf(train, lambda = lambda)

# Plot the training data with the forecast and the held-out observations,
# one layer per line.
autoplot(train) +
  autolayer(fc) +
  autolayer(test, series = "Tests")
## ME RMSE MAE MPE MAPE MASE
## Training set -0.3506253 8.334542 5.215022 -0.1703858 2.904052 0.2696797
## Test set -9.7437987 13.250443 10.684962 -4.9152571 5.367077 0.5525417
## ACF1 Theil's U
## Training set 0.03921138 NA
## Test set 0.23659054 0.6344392
# Exhaustive ARIMA search for wmurders on the log scale (lambda = 0):
# stepwise search and approximation are disabled, and the order limits are
# raised so the full model space is considered.
fit <- auto.arima(
  wmurders,
  lambda = 0,
  stepwise = FALSE,
  approximation = FALSE,
  max.p = 10,
  max.q = 10,
  max.order = 10
)

# Residual diagnostics: Ljung-Box test plus residual/ACF plots.
checkresiduals(fit)
##
## Ljung-Box test
##
## data: Residuals from ARIMA(1,2,1)
## Q* = 10.562, df = 8, p-value = 0.2278
##
## Model df: 2. Total lags used: 10
## Series: train
## ARIMA(1,1,0)(1,1,0)[12]
## Box Cox transformation: lambda= -0.2741334
##
## Coefficients:
## ar1 sar1
## -0.2188 -0.3834
## s.e. 0.0539 0.0510
##
## sigma^2 estimated as 0.0001804: log likelihood=960.12
## AIC=-1914.23 AICc=-1914.16 BIC=-1902.82
## Series: train
## ARIMA(3,1,0)(2,1,0)[12]
## Box Cox transformation: lambda= -0.2741334
##
## Coefficients:
## ar1 ar2 ar3 sar1 sar2
## -0.2192 -0.0053 0.1307 -0.5164 -0.3482
## s.e. 0.0552 0.0559 0.0545 0.0527 0.0523
##
## sigma^2 estimated as 0.0001574: log likelihood=982.76
## AIC=-1953.51 AICc=-1953.26 BIC=-1930.68
## ETS(M,Ad,M)
##
## Call:
## ets(y = train)
##
## Smoothing parameters:
## alpha = 0.8031
## beta = 0.0049
## gamma = 1e-04
## phi = 0.9767
##
## Initial states:
## l = 63.945
## b = 0.2145
## s = 0.9895 0.9373 1.0084 1.1736 1.0097 1.0174
## 0.9782 0.9979 0.9931 0.9466 0.9759 0.9725
##
## sigma: 0.0457
##
## AIC AICc BIC
## 3383.427 3385.526 3452.611
# Compare five forecasting approaches over the same horizon as the test set.
f1 <- snaive(train, h = length(test))  # seasonal naive benchmark
# Spell out "multiplicative" instead of relying on partial matching of 'multi',
# and use double quotes per style convention.
f2 <- hw(train, h = length(test), seasonal = "multiplicative")
f3 <- forecast(etsmod, h = length(test))   # ETS model fitted earlier
f4 <- stlf(train, lambda = lambda, h = length(test))  # STL + ETS with Box-Cox
f5 <- forecast(arimamod, h = length(test))  # ARIMA model fitted earlier
##
## Ljung-Box test
##
## data: Residuals from Seasonal naive method
## Q* = 1256.8, df = 24, p-value < 2.2e-16
##
## Model df: 0. Total lags used: 24
##
## Ljung-Box test
##
## data: Residuals from Holt-Winters' multiplicative method
## Q* = 43.167, df = 8, p-value = 8.171e-07
##
## Model df: 16. Total lags used: 24
##
## Ljung-Box test
##
## data: Residuals from ETS(M,Ad,M)
## Q* = 40.992, df = 7, p-value = 8.126e-07
##
## Model df: 17. Total lags used: 24
##
## Ljung-Box test
##
## data: Residuals from STL + ETS(M,A,N)
## Q* = 91.221, df = 20, p-value = 4.531e-11
##
## Model df: 4. Total lags used: 24
##
## Ljung-Box test
##
## data: Residuals from ARIMA(3,1,0)(2,1,0)[12]
## Q* = 54.96, df = 19, p-value = 2.357e-05
##
## Model df: 5. Total lags used: 24
## [1] 23.40458
## [1] 59.85277
## [1] 25.43061
## [1] 34.82789
## [1] 105.5313